[dependencies.semver]
git = "https://github.com/rust-lang/semver"
+[dependencies.curl]
+git = "https://github.com/carllerche/curl-rust"
+
[dependencies.tar]
git = "https://github.com/alexcrichton/tar-rs"
$macro!(generate_lockfile)
$macro!(git_checkout)
$macro!(locate_project)
+ $macro!(login)
$macro!(new)
$macro!(package)
$macro!(read_manifest)
$macro!(run)
$macro!(test)
$macro!(update)
+ $macro!(upload)
$macro!(verify_project)
$macro!(version)
}) )
--- /dev/null
+use std::io;
+use docopt;
+
+use cargo::ops;
+use cargo::core::{MultiShell};
+use cargo::sources::RegistrySource;
+use cargo::util::{CliResult, CliError};
+
+docopt!(Options, "
+Save an api token from the registry locally
+
+Usage:
+ cargo login [options] [<token>]
+
+Options:
+ -h, --help Print this message
+ --host HOST Host to set the token for
+ -v, --verbose Use verbose output
+
+", arg_token: Option<String>, flag_host: Option<String>)
+
+pub fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
+ shell.set_verbose(options.flag_verbose);
+ let token = match options.arg_token.clone() {
+ Some(token) => token,
+ None => {
+ let default = RegistrySource::url().unwrap().to_string();
+ let host = options.flag_host.unwrap_or(default);
+ println!("please visit {}/me and paste the API Token below", host);
+ try!(io::stdin().read_line().map_err(|e| {
+ CliError::from_boxed(box e, 101)
+ }))
+ }
+ };
+
+ let token = token.as_slice().trim().to_string();
+ try!(ops::upload_login(shell, token).map_err(|e| {
+ CliError::from_boxed(e, 101)
+ }));
+ Ok(None)
+}
+
--- /dev/null
+use docopt;
+
+use cargo::ops;
+use cargo::core::{MultiShell};
+use cargo::util::{CliResult, CliError};
+use cargo::util::important_paths::find_root_manifest_for_cwd;
+
+docopt!(Options, "
+Upload a package to the registry
+
+Usage:
+ cargo upload [options]
+
+Options:
+ -h, --help Print this message
+ --host HOST Host to upload the package to
+ --token TOKEN Token to use when uploading
+ --manifest-path PATH Path to the manifest to compile
+ -v, --verbose Use verbose output
+
+", flag_host: Option<String>, flag_token: Option<String>,
+ flag_manifest_path: Option<String>)
+
+pub fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
+ shell.set_verbose(options.flag_verbose);
+ let Options {
+ flag_token: token,
+ flag_host: host,
+ flag_manifest_path,
+ ..
+ } = options;
+
+ let root = try!(find_root_manifest_for_cwd(flag_manifest_path.clone()));
+ ops::upload(&root, shell, token, host).map(|_| None).map_err(|err| {
+ CliError::from_boxed(err, 101)
+ })
+}
}
}
- pub fn get(&self, package_ids: &[PackageId]) -> CargoResult<Vec<Package>> {
+ pub fn get(&mut self, package_ids: &[PackageId]) -> CargoResult<Vec<Package>> {
log!(5, "getting packags; sources={}; ids={}", self.sources.len(),
package_ids);
// source
let mut ret = Vec::new();
- for source in self.sources.sources() {
+ for source in self.sources.sources_mut() {
try!(source.download(package_ids));
let packages = try!(source.get(package_ids));
use url::Url;
use core::{Summary, Package, PackageId, Registry, Dependency};
-use sources::{PathSource, GitSource, DummyRegistrySource};
+use sources::{PathSource, GitSource, RegistrySource};
use sources::git;
use util::{human, Config, CargoResult, CargoError, ToUrl};
/// The download method fetches the full package for each name and
/// version specified.
- fn download(&self, packages: &[PackageId]) -> CargoResult<()>;
+ fn download(&mut self, packages: &[PackageId]) -> CargoResult<()>;
/// The get method returns the Path of each specified package on the
/// local file system. It assumes that `download` was already called,
}
Ok(())
},
- SourceId { kind: RegistryKind, .. } => {
- // TODO: Central registry vs. alternates
- write!(f, "the package registry")
+ SourceId { kind: RegistryKind, ref url, .. } => {
+ let default = RegistrySource::url().ok();
+ if default.as_ref() == Some(url) {
+ write!(f, "the package registry")
+ } else {
+ write!(f, "registry {}", url)
+ }
}
}
}
let precise = mem::replace(&mut url.fragment, None);
SourceId::for_git(&url, reference.as_slice(), precise)
},
- "registry" => SourceId::for_central(),
+ "registry" => {
+ let url = url.to_url().unwrap();
+ SourceId::for_registry(&url)
+ }
"path" => SourceId::for_path(&Path::new(url.slice_from(5))).unwrap(),
_ => fail!("Unsupported serialized SourceId")
}
id
}
- pub fn for_central() -> SourceId {
- SourceId::new(RegistryKind,
- "https://example.com".to_url().unwrap())
+ pub fn for_registry(url: &Url) -> SourceId {
+ SourceId::new(RegistryKind, url.clone())
+ }
+
+ pub fn for_central() -> CargoResult<SourceId> {
+ Ok(SourceId::for_registry(&try!(RegistrySource::url())))
}
pub fn get_url(&self) -> &Url {
};
box PathSource::new(&path, self) as Box<Source>
},
- RegistryKind => box DummyRegistrySource::new(self) as Box<Source+'a>,
+ RegistryKind => box RegistrySource::new(self, config) as Box<Source+'a>,
}
}
Ok(())
}
- fn download(&self, packages: &[PackageId]) -> CargoResult<()> {
- for source in self.sources.iter() {
+ fn download(&mut self, packages: &[PackageId]) -> CargoResult<()> {
+ for source in self.sources.mut_iter() {
try!(source.download(packages));
}
Ex => self.is_exact(ver),
Gt => self.is_greater(ver),
GtEq => self.is_exact(ver) || self.is_greater(ver),
- _ => false // not implemented
+ Lt => !self.is_exact(ver) && !self.is_greater(ver),
+ LtEq => !self.is_greater(ver),
}
}
fn is_greater(self, ver: &Version) -> bool {
if self.major != ver.major {
- return self.major > ver.major;
+ return ver.major > self.major;
}
match self.minor {
Some(minor) => {
if minor != ver.minor {
- return minor > ver.minor
+ return ver.minor > minor
}
}
None => return false
match self.patch {
Some(patch) => {
if patch != ver.patch {
- return patch > ver.patch
+ return ver.patch > patch
}
}
LexStart => {
if c.is_whitespace() {
next!(); // Ignore
- }
- else if c.is_alphanumeric() {
+ } else if c.is_alphanumeric() {
self.mark(idx);
self.state = LexAlphaNum;
next!();
- }
- else if is_sigil(c) {
+ } else if is_sigil(c) {
self.mark(idx);
self.state = LexSigil;
next!();
- }
- else if c == '.' {
+ } else if c == '.' {
self.state = LexInit;
return Some(Dot);
- }
- else if c == ',' {
+ } else if c == ',' {
self.state = LexInit;
return Some(Comma);
} else {
}
#[test]
- pub fn test_parsing_exact() {
+ fn test_parsing_exact() {
let r = req("1.0.0");
assert!(r.to_string() == "= 1.0.0".to_string());
}
#[test]
- pub fn test_parsing_greater_than() {
+ fn test_parsing_greater_than() {
let r = req(">= 1.0.0");
assert!(r.to_string() == ">= 1.0.0".to_string());
- assert_match(&r, ["1.0.0"]);
+ assert_match(&r, ["1.0.0", "2.0.0", "1.2.3", "1.4.0"]);
+ assert_not_match(&r, ["0.0.1", "0.9.9"]);
+ }
+
+ #[test]
+ fn test_parsing_less_than() {
+ let r = req("<= 1.0.0");
+
+ assert!(r.to_string() == "<= 1.0.0".to_string());
+
+ assert_not_match(&r, ["2.0.0", "1.2.3", "1.4.0"]);
+ assert_match(&r, ["0.0.1", "0.9.9", "1.0.0"]);
}
/* TODO:
#[phase(plugin)] extern crate regex_macros;
#[phase(plugin, link)] extern crate log;
+extern crate curl;
extern crate docopt;
extern crate flate2;
extern crate git2;
let filename = format!("{}-{}.tar.gz", pkg.get_name(), pkg.get_version());
let dst = pkg.get_manifest_path().dir_path().join(filename);
+ if dst.exists() { return Ok(dst) }
+
try!(shell.status("Packaging", pkg.get_package_id().to_string()));
try!(tar(&pkg, &src, shell, &dst).chain_error(|| {
human("failed to prepare local package for uploading")
}));
-
Ok(dst)
}
internal(format!("could not archive source file `{}`", relative))
}));
}
-
+ try!(ar.finish());
Ok(())
}
--- /dev/null
+use std::collections::HashMap;
+use std::io::File;
+use std::os;
+use std::str;
+use serialize::json;
+
+use curl::http;
+
+use core::source::Source;
+use core::{Package, MultiShell, SourceId};
+use ops;
+use sources::{PathSource, RegistrySource};
+use util::config;
+use util::{CargoResult, human, internal, ChainError, Require, ToUrl};
+use util::config::{Config, Table};
+
+pub struct UploadConfig {
+ pub host: Option<String>,
+ pub token: Option<String>,
+}
+
+pub fn upload(manifest_path: &Path,
+ shell: &mut MultiShell,
+ token: Option<String>,
+ host: Option<String>) -> CargoResult<()> {
+ let mut src = try!(PathSource::for_path(&manifest_path.dir_path()));
+ try!(src.update());
+ let pkg = try!(src.get_root_package());
+
+ // Parse all configuration options
+ let UploadConfig { token: token_config, .. } = try!(upload_configuration());
+ let token = try!(token.or(token_config).require(|| {
+ human("no upload token found, please run `cargo login`")
+ }));
+ let host = host.unwrap_or(try!(RegistrySource::url()).to_string());
+
+ // First, prepare a tarball
+ let tarball = try!(ops::package(manifest_path, shell));
+ let tarball = try!(File::open(&tarball));
+
+ // Upload said tarball to the specified destination
+ try!(shell.status("Uploading", pkg.get_package_id().to_string()));
+ try!(transmit(&pkg, tarball, token.as_slice(),
+ host.as_slice()).chain_error(|| {
+ human(format!("failed to upload package to registry: {}", host))
+ }));
+
+ Ok(())
+}
+
+fn transmit(pkg: &Package, mut tarball: File,
+ token: &str, host: &str) -> CargoResult<()> {
+ let stat = try!(tarball.stat());
+ let url = try!(host.to_url().map_err(human));
+ let registry_src = SourceId::for_registry(&url);
+
+ let url = format!("{}/packages/new", host.trim_right_chars('/'));
+ let mut handle = http::handle();
+ let mut req = handle.post(url.as_slice(), &mut tarball)
+ .content_length(stat.size as uint)
+ .content_type("application/x-tar")
+ .header("Content-Encoding", "x-gzip")
+ .header("X-Cargo-Auth", token)
+ .header("X-Cargo-Pkg-Name", pkg.get_name())
+ .header("X-Cargo-Pkg-Version",
+ pkg.get_version().to_string().as_slice());
+
+ let mut dep_header = String::new();
+ for (i, dep) in pkg.get_dependencies().iter().enumerate() {
+ if !dep.is_transitive() { continue }
+ if dep.get_source_id() != &registry_src {
+ return Err(human(format!("All dependencies must come from the \
+ same registry.\nDependency `{}` comes \
+ from {} instead", dep.get_name(),
+ dep.get_source_id())))
+ }
+ let header = format!("{}|{}", dep.get_name(), dep.get_version_req());
+ if i > 0 { dep_header.push_str(";"); }
+ dep_header.push_str(header.as_slice());
+ }
+ req = req.header("X-Cargo-Pkg-Dep", dep_header.as_slice());
+
+ let response = try!(req.exec());
+
+ if response.get_code() != 200 {
+ return Err(internal(format!("failed to get a 200 response: {}",
+ response)))
+ }
+
+ let body = try!(str::from_utf8(response.get_body()).require(|| {
+ internal("failed to get a utf-8 response")
+ }));
+
+ #[deriving(Decodable)]
+ struct Response { ok: bool }
+ #[deriving(Decodable)]
+ struct BadResponse { error: String }
+ let json = try!(json::decode::<Response>(body));
+ if json.ok { return Ok(()) }
+
+ let json = try!(json::decode::<BadResponse>(body));
+ Err(human(format!("failed to upload `{}`: {}", pkg, json.error)))
+}
+
+pub fn upload_configuration() -> CargoResult<UploadConfig> {
+ let configs = try!(config::all_configs(os::getcwd()));
+ let registry = match configs.find_equiv(&"registry") {
+ None => return Ok(UploadConfig { host: None, token: None }),
+ Some(registry) => try!(registry.table().chain_error(|| {
+ internal("invalid configuration for the key `registry`")
+ })),
+ };
+ let host = match registry.find_equiv(&"host") {
+ None => None,
+ Some(host) => {
+ Some(try!(host.string().chain_error(|| {
+ internal("invalid configuration for key `host`")
+ })).ref0().to_string())
+ }
+ };
+ let token = match registry.find_equiv(&"token") {
+ None => None,
+ Some(token) => {
+ Some(try!(token.string().chain_error(|| {
+ internal("invalid configuration for key `token`")
+ })).ref0().to_string())
+ }
+ };
+ Ok(UploadConfig { host: host, token: token })
+}
+
+pub fn upload_login(shell: &mut MultiShell, token: String) -> CargoResult<()> {
+ let config = try!(Config::new(shell, None, None));
+ let UploadConfig { host, token: _ } = try!(upload_configuration());
+ let mut map = HashMap::new();
+ let p = os::getcwd();
+ match host {
+ Some(host) => {
+ map.insert("host".to_string(), config::String(host, p.clone()));
+ }
+ None => {}
+ }
+ map.insert("token".to_string(), config::String(token, p));
+
+ config::set_config(&config, config::Global, "registry", config::Table(map))
+}
pub use self::cargo_generate_lockfile::{update_lockfile, load_lockfile};
pub use self::cargo_test::{run_tests, run_benches, TestOptions};
pub use self::cargo_package::package;
+pub use self::cargo_upload::{upload, upload_configuration, UploadConfig};
+pub use self::cargo_upload::upload_login;
mod cargo_clean;
mod cargo_compile;
mod cargo_generate_lockfile;
mod cargo_test;
mod cargo_package;
+mod cargo_upload;
self.path_source.as_mut().unwrap().update()
}
- fn download(&self, _: &[PackageId]) -> CargoResult<()> {
+ fn download(&mut self, _: &[PackageId]) -> CargoResult<()> {
// TODO: assert! that the PackageId is contained by the source
Ok(())
}
pub use self::path::PathSource;
pub use self::git::GitSource;
-pub use self::registry::DummyRegistrySource;
+pub use self::registry::RegistrySource;
pub mod path;
pub mod git;
Ok(())
}
- fn download(&self, _: &[PackageId]) -> CargoResult<()>{
+ fn download(&mut self, _: &[PackageId]) -> CargoResult<()>{
// TODO: assert! that the PackageId is contained by the source
Ok(())
}
+#![allow(unused)]
+use std::io::{mod, fs, File, MemReader};
+use curl::http;
+use git2;
use semver::Version;
+use flate2::reader::GzDecoder;
+use serialize::json;
+use tar::Archive;
+use url::Url;
use core::{Source, SourceId, PackageId, Package, Summary, Registry};
use core::Dependency;
-use util::CargoResult;
+use sources::PathSource;
+use util::{CargoResult, Config, internal, ChainError, ToUrl, human};
+use util::{hex, Require};
+use ops;
-pub struct DummyRegistrySource {
- id: SourceId,
+static CENTRAL: &'static str = "https://example.com";
+
+pub struct RegistrySource<'a, 'b:'a> {
+ source_id: SourceId,
+ checkout_path: Path,
+ cache_path: Path,
+ src_path: Path,
+ config: &'a mut Config<'b>,
+ handle: http::Handle,
+ sources: Vec<PathSource>,
+}
+
+#[deriving(Decodable)]
+struct RegistryConfig {
+ dl_url: String,
}
-impl DummyRegistrySource {
- pub fn new(id: &SourceId) -> DummyRegistrySource {
- DummyRegistrySource { id: id.clone() }
+impl<'a, 'b> RegistrySource<'a, 'b> {
+ pub fn new(source_id: &SourceId,
+ config: &'a mut Config<'b>) -> RegistrySource<'a, 'b> {
+ let hash = hex::short_hash(source_id);
+ let ident = source_id.get_url().host().unwrap().to_string();
+ let part = format!("{}-{}", ident, hash);
+ RegistrySource {
+ checkout_path: config.registry_index_path().join(part.as_slice()),
+ cache_path: config.registry_cache_path().join(part.as_slice()),
+ src_path: config.registry_source_path().join(part.as_slice()),
+ config: config,
+ source_id: source_id.clone(),
+ handle: http::Handle::new(),
+ sources: Vec::new(),
+ }
+ }
+
+ /// Get the configured default registry URL.
+ ///
+ /// This is the main cargo registry by default, but it can be overridden in
+ /// a .cargo/config
+ pub fn url() -> CargoResult<Url> {
+ let config = try!(ops::upload_configuration());
+ let url = config.host.unwrap_or(CENTRAL.to_string());
+ url.as_slice().to_url().map_err(human)
+ }
+
+ /// Translates the HTTP url of the registry to the git URL
+ fn git_url(&self) -> Url {
+ let mut url = self.source_id.get_url().clone();
+ url.path_mut().unwrap().push("git".to_string());
+ url.path_mut().unwrap().push("index".to_string());
+ url
+ }
+
+ /// Decode the configuration stored within the registry.
+ ///
+ /// This requires that the index has been at least checked out.
+ fn config(&self) -> CargoResult<RegistryConfig> {
+ let mut f = try!(File::open(&self.checkout_path.join("config.json")));
+ let contents = try!(f.read_to_string());
+ let config = try!(json::decode(contents.as_slice()));
+ Ok(config)
+ }
+
+ /// Open the git repository for the index of the registry.
+ ///
+ /// This will attempt to open an existing checkout, and failing that it will
+ /// initialize a fresh new directory and git checkout. No remotes will be
+ /// configured by default.
+ fn open(&self) -> CargoResult<git2::Repository> {
+ match git2::Repository::open(&self.checkout_path) {
+ Ok(repo) => return Ok(repo),
+ Err(..) => {}
+ }
+
+ try!(fs::mkdir_recursive(&self.checkout_path, io::UserDir));
+ let _ = fs::rmdir_recursive(&self.checkout_path);
+ let url = self.git_url().to_string();
+ let repo = try!(git2::Repository::init(&self.checkout_path));
+ Ok(repo)
+ }
+
+ /// Download the given package from the given url into the local cache.
+ ///
+ /// This will perform the HTTP request to fetch the package. This function
+ /// will only succeed if the HTTP download was successful and the file is
+ /// then ready for inspection.
+ ///
+ /// No action is taken if the package is already downloaded.
+ fn download_package(&mut self, pkg: &PackageId, url: Url)
+ -> CargoResult<Path> {
+ let dst = self.cache_path.join(url.path().unwrap().last().unwrap()
+ .as_slice());
+ if dst.exists() { return Ok(dst) }
+ try!(self.config.shell().status("Downloading", pkg));
+
+ try!(fs::mkdir_recursive(&dst.dir_path(), io::UserDir));
+ // TODO: don't download into memory
+ let resp = try!(self.handle.get(url.to_string()).exec());
+ if resp.get_code() != 200 {
+ return Err(internal(format!("Failed to get 200 response from {}\n{}",
+ url, resp)))
+ }
+ try!(File::create(&dst).write(resp.get_body()));
+ Ok(dst)
+ }
+
+ /// Unpacks a downloaded package into a location where it's ready to be
+ /// compiled.
+ ///
+ /// No action is taken if the source looks like it's already unpacked.
+ fn unpack_package(&self, pkg: &PackageId, tarball: Path)
+ -> CargoResult<Path> {
+ let dst = self.src_path.join(format!("{}-{}", pkg.get_name(),
+ pkg.get_version()));
+ if dst.join(".cargo-ok").exists() { return Ok(dst) }
+
+ try!(fs::mkdir_recursive(&dst.dir_path(), io::UserDir));
+ let f = try!(File::open(&tarball));
+ let mut gz = try!(GzDecoder::new(f));
+ // TODO: don't read into memory
+ let mem = try!(gz.read_to_end());
+ let tar = Archive::new(MemReader::new(mem));
+ for file in try!(tar.files()) {
+ let mut file = try!(file);
+ let dst = dst.dir_path().join(file.filename_bytes());
+ try!(fs::mkdir_recursive(&dst.dir_path(), io::UserDir));
+ let mut dst = try!(File::create(&dst));
+ try!(io::util::copy(&mut file, &mut dst));
+ }
+ try!(File::create(&dst.join(".cargo-ok")));
+ Ok(dst)
}
}
-impl Registry for DummyRegistrySource {
- // This is a hack to get tests to pass, this is just a dummy registry.
+impl<'a, 'b> Registry for RegistrySource<'a, 'b> {
fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
- let mut version = Version {
- major: 0, minor: 0, patch: 0,
- pre: Vec::new(), build: Vec::new(),
+ let path = &self.checkout_path;
+ let mut chars = dep.get_name().chars();
+ let path = path.join(format!("{}{}", chars.next().unwrap_or('X'),
+ chars.next().unwrap_or('X')));
+ let path = path.join(format!("{}{}", chars.next().unwrap_or('X'),
+ chars.next().unwrap_or('X')));
+ let path = path.join(dep.get_name());
+ let contents = match File::open(&path) {
+ Ok(mut f) => try!(f.read_to_string()),
+ Err(..) => return Ok(Vec::new()),
};
- for i in range(0, 10) {
- version.minor = i;
- if dep.get_version_req().matches(&version) { break }
- }
- let pkgid = PackageId::new(dep.get_name().as_slice(),
- version,
- &self.id).unwrap();
- Ok(vec![Summary::new(&pkgid, [])])
+
+ let ret: CargoResult<Vec<Summary>>;
+ ret = contents.as_slice().lines().filter(|l| l.trim().len() > 0)
+ .map(|l| {
+ #[deriving(Decodable)]
+ struct Package { name: String, vers: String, deps: Vec<String> }
+
+ let pkg = try!(json::decode::<Package>(l));
+ let pkgid = try!(PackageId::new(pkg.name.as_slice(),
+ pkg.vers.as_slice(),
+ &self.source_id));
+ let deps: CargoResult<Vec<Dependency>> = pkg.deps.iter().map(|dep| {
+ let mut parts = dep.as_slice().splitn(1, '|');
+ let name = parts.next().unwrap();
+ let vers = try!(parts.next().require(|| {
+ human(format!("malformed dependency in registry: {}", dep))
+ }));
+ Dependency::parse(name, Some(vers), &self.source_id)
+ }).collect();
+ let deps = try!(deps);
+ Ok(Summary::new(&pkgid, deps.as_slice()))
+ }).collect();
+ let mut summaries = try!(ret.chain_error(|| {
+ internal(format!("Failed to parse registry's information for: {}",
+ dep.get_name()))
+ }));
+ summaries.query(dep)
}
}
-impl Source for DummyRegistrySource {
- fn update(&mut self) -> CargoResult<()> { Ok(()) }
- fn download(&self, _packages: &[PackageId]) -> CargoResult<()> { Ok(()) }
- fn get(&self, _packages: &[PackageId]) -> CargoResult<Vec<Package>> {
- Ok(Vec::new())
+impl<'a, 'b> Source for RegistrySource<'a, 'b> {
+ fn update(&mut self) -> CargoResult<()> {
+ try!(self.config.shell().status("Updating",
+ format!("registry `{}`", self.source_id.get_url())));
+ let repo = try!(self.open());
+
+ // git fetch origin
+ let url = self.git_url().to_string();
+ let refspec = "refs/heads/*:refs/remotes/origin/*";
+ let mut remote = try!(repo.remote_create_anonymous(url.as_slice(),
+ refspec));
+ log!(5, "[{}] fetching {}", self.source_id, url);
+ try!(remote.fetch(None, None).chain_error(|| {
+ internal(format!("failed to fetch `{}`", url))
+ }));
+
+ // git reset --hard origin/master
+ let reference = "refs/remotes/origin/master";
+ let oid = try!(git2::Reference::name_to_id(&repo, reference));
+ log!(5, "[{}] updating to rev {}", self.source_id, oid);
+ let object = try!(git2::Object::lookup(&repo, oid, None));
+ try!(repo.reset(&object, git2::Hard, None, None));
+ Ok(())
}
- fn fingerprint(&self, _pkg: &Package) -> CargoResult<String> {
- unimplemented!()
+
+ fn download(&mut self, packages: &[PackageId]) -> CargoResult<()> {
+ let config = try!(self.config());
+ let url = try!(config.dl_url.as_slice().to_url().map_err(internal));
+ for package in packages.iter() {
+ if self.source_id != *package.get_source_id() { continue }
+
+ let mut url = url.clone();
+ url.path_mut().unwrap().push("pkg".to_string());
+ url.path_mut().unwrap().push(package.get_name().to_string());
+ url.path_mut().unwrap().push(format!("{}-{}.tar.gz",
+ package.get_name(),
+ package.get_version()));
+ let path = try!(self.download_package(package, url).chain_error(|| {
+ internal(format!("Failed to download package `{}`", package))
+ }));
+ let path = try!(self.unpack_package(package, path).chain_error(|| {
+ internal(format!("Failed to unpack package `{}`", package))
+ }));
+ let mut src = PathSource::new(&path, &self.source_id);
+ try!(src.update());
+ self.sources.push(src);
+ }
+ Ok(())
+ }
+
+ fn get(&self, packages: &[PackageId]) -> CargoResult<Vec<Package>> {
+ let mut ret = Vec::new();
+ for src in self.sources.iter() {
+ ret.extend(try!(src.get(packages)).move_iter());
+ }
+ return Ok(ret);
+ }
+
+ fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+ Ok(pkg.get_package_id().get_version().to_string())
}
}
self.home_path.join(".cargo").join("git").join("checkouts")
}
+ pub fn registry_index_path(&self) -> Path {
+ self.home_path.join(".cargo").join("registry").join("index")
+ }
+
+ pub fn registry_cache_path(&self) -> Path {
+ self.home_path.join(".cargo").join("registry").join("cache")
+ }
+
+ pub fn registry_source_path(&self) -> Path {
+ self.home_path.join(".cargo").join("registry").join("src")
+ }
+
pub fn shell(&mut self) -> &mut MultiShell {
&mut *self.shell
}
Boolean(..) => "boolean",
}
}
+
+ fn into_toml(self) -> toml::Value {
+ match self {
+ Boolean(s, _) => toml::Boolean(s),
+ String(s, _) => toml::String(s),
+ List(l) => toml::Array(l.move_iter().map(|(s, _)| toml::String(s))
+ .collect()),
+ Table(l) => toml::Table(l.move_iter()
+ .map(|(k, v)| (k, v.into_toml()))
+ .collect()),
+ }
+ }
}
pub fn get_config(pwd: Path, key: &str) -> CargoResult<ConfigValue> {
}
fn find_in_tree<T>(pwd: &Path,
- walk: |io::fs::File| -> CargoResult<T>) -> CargoResult<T> {
+ walk: |File| -> CargoResult<T>) -> CargoResult<T> {
let mut current = pwd.clone();
loop {
let possible = current.join(".cargo").join("config");
if possible.exists() {
- let file = try!(io::fs::File::open(&possible));
+ let file = try!(File::open(&possible));
match walk(file) {
Ok(res) => return Ok(res),
}
fn walk_tree(pwd: &Path,
- walk: |io::fs::File| -> CargoResult<()>) -> CargoResult<()> {
+ walk: |File| -> CargoResult<()>) -> CargoResult<()> {
let mut current = pwd.clone();
let mut err = false;
loop {
let possible = current.join(".cargo").join("config");
if possible.exists() {
- let file = try!(io::fs::File::open(&possible));
+ let file = try!(File::open(&possible));
match walk(file) {
Err(_) => err = false,
Ok(())
}
-fn extract_config(mut file: io::fs::File, key: &str) -> CargoResult<ConfigValue> {
+fn extract_config(mut file: File, key: &str) -> CargoResult<ConfigValue> {
let contents = try!(file.read_to_string());
let mut toml = try!(cargo_toml::parse(contents.as_slice(), file.path()));
let val = try!(toml.pop(&key.to_string()).require(|| internal("")));
ConfigValue::from_toml(file.path(), val)
}
+
+pub fn set_config(cfg: &Config, loc: Location, key: &str,
+ value: ConfigValue) -> CargoResult<()> {
+ // TODO: There are a number of drawbacks here
+ //
+ // 1. Project is unimplemented
+ // 2. This blows away all comments in a file
+ // 3. This blows away the previous ordering of a file.
+ let file = match loc {
+ Global => cfg.home_path.join(".cargo").join("config"),
+ Project => unimplemented!(),
+ };
+ let contents = File::open(&file).read_to_string().unwrap_or(String::new());
+ let mut toml = try!(cargo_toml::parse(contents.as_slice(), &file));
+ toml.insert(key.to_string(), value.into_toml());
+ try!(File::create(&file).write(toml::Table(toml).to_string().as_bytes()));
+ Ok(())
+}
use std::io::IoError;
use std::fmt::{mod, Show, Formatter, FormatError};
use std::str;
+use serialize::json;
+use curl;
use docopt;
use toml::Error as TomlError;
use url;
from_error!(FormatError)
+impl CargoError for curl::ErrCode {
+ fn description(&self) -> String { self.to_string() }
+}
+
+from_error!(curl::ErrCode)
+
+impl CargoError for json::DecoderError {
+ fn description(&self) -> String { self.to_string() }
+}
+
+from_error!(json::DecoderError)
+
pub struct ProcessError {
pub msg: String,
pub exit: Option<ProcessExit>,
for (n, v) in dependencies.iter() {
let (version, source_id) = match *v {
SimpleDep(ref string) => {
- (Some(string.clone()), SourceId::for_central())
+ (Some(string.clone()), try!(SourceId::for_central()))
},
DetailedDep(ref details) => {
let reference = details.branch.clone()
cx.source_id.clone()
})
}
- }.unwrap_or(SourceId::for_central());
+ }.unwrap_or(try!(SourceId::for_central()));
(details.version.clone(), new_source_id)
}
use std::path;
use support::{ResultTest, project, execs, main_file, basic_bin_manifest};
-use support::{COMPILING, RUNNING, cargo_dir, ProjectBuilder, path2url};
+use support::{COMPILING, RUNNING, cargo_dir, ProjectBuilder};
use hamcrest::{assert_that, existing_file};
use support::paths::PathExt;
use cargo;
-use cargo::util::{process, realpath};
+use cargo::util::process;
fn setup() {
}
test!(cargo_compile_with_warnings_in_a_dep_package {
let mut p = project("foo");
- let bar = p.root().join("bar");
p = p
- .file(".cargo/config", format!(r#"
- paths = ['{}']
- "#, bar.display()).as_slice())
.file("Cargo.toml", r#"
[project]
version = "0.5.0"
authors = ["wycats@example.com"]
- [dependencies]
-
- bar = "0.5.0"
+ [dependencies.bar]
+ path = "bar"
[[bin]]
fn dead() {}
"#);
- let bar = realpath(&p.root().join("bar")).assert();
- let main = realpath(&p.root()).assert();
-
assert_that(p.cargo_process("build"),
execs()
.with_stdout(format!("{} bar v0.5.0 ({})\n\
{} foo v0.5.0 ({})\n",
- COMPILING, path2url(bar),
- COMPILING, path2url(main)))
+ COMPILING, p.url(),
+ COMPILING, p.url()))
.with_stderr(""));
assert_that(&p.bin("foo"), existing_file());
})
test!(cargo_compile_with_nested_deps_inferred {
- let mut p = project("foo");
- let bar = p.root().join("bar");
- let baz = p.root().join("baz");
-
- p = p
- .file(".cargo/config", format!(r#"
- paths = ['{}', '{}']
- "#, bar.display(), baz.display()).as_slice())
+ let p = project("foo")
.file("Cargo.toml", r#"
[project]
version = "0.5.0"
authors = ["wycats@example.com"]
- [dependencies]
-
- bar = "0.5.0"
+ [dependencies.bar]
+ path = 'bar'
[[bin]]
-
name = "foo"
"#)
.file("src/foo.rs",
version = "0.5.0"
authors = ["wycats@example.com"]
- [dependencies]
-
- baz = "0.5.0"
+ [dependencies.baz]
+ path = "../baz"
"#)
.file("bar/src/lib.rs", r#"
extern crate baz;
})
test!(cargo_compile_with_nested_deps_correct_bin {
- let mut p = project("foo");
- let bar = p.root().join("bar");
- let baz = p.root().join("baz");
-
- p = p
- .file(".cargo/config", format!(r#"
- paths = ['{}', '{}']
- "#, bar.display(), baz.display()).as_slice())
+ let p = project("foo")
.file("Cargo.toml", r#"
[project]
version = "0.5.0"
authors = ["wycats@example.com"]
- [dependencies]
-
- bar = "0.5.0"
+ [dependencies.bar]
+ path = "bar"
[[bin]]
-
name = "foo"
"#)
.file("src/main.rs",
version = "0.5.0"
authors = ["wycats@example.com"]
- [dependencies]
-
- baz = "0.5.0"
+ [dependencies.baz]
+ path = "../baz"
"#)
.file("bar/src/lib.rs", r#"
extern crate baz;
})
test!(cargo_compile_with_nested_deps_shorthand {
- let mut p = project("foo");
- let bar = p.root().join("bar");
- let baz = p.root().join("baz");
-
- p = p
- .file(".cargo/config", format!(r#"
- paths = ['{}', '{}']
- "#, bar.display(), baz.display()).as_slice())
+ let p = project("foo")
.file("Cargo.toml", r#"
[project]
version = "0.5.0"
authors = ["wycats@example.com"]
- [dependencies]
-
- bar = "0.5.0"
+ [dependencies.bar]
+ path = "bar"
[[bin]]
version = "0.5.0"
authors = ["wycats@example.com"]
- [dependencies]
-
- baz = "0.5.0"
+ [dependencies.baz]
+ path = "../baz"
[lib]
})
test!(cargo_compile_with_nested_deps_longhand {
- let mut p = project("foo");
- let bar = p.root().join("bar");
- let baz = p.root().join("baz");
-
- p = p
- .file(".cargo/config", format!(r#"
- paths = ['{}', '{}']
- "#, bar.display(), baz.display()).as_slice())
+ let p = project("foo")
.file("Cargo.toml", r#"
[project]
version = "0.5.0"
authors = ["wycats@example.com"]
- [dependencies]
-
- bar = "0.5.0"
+ [dependencies.bar]
+ path = "bar"
+ version = "0.5.0"
[[bin]]
authors = ["wycats@example.com"]
[dependencies.baz]
-
+ path = "../baz"
version = "0.5.0"
[lib]
test!(custom_build_in_dependency {
let mut p = project("foo");
- let bar = p.root().join("bar");
let mut build = project("builder");
build = build
.file("Cargo.toml", r#"
p = p
- .file(".cargo/config", format!(r#"
- paths = ['{}']
- "#, bar.display()).as_slice())
.file("Cargo.toml", r#"
[project]
[[bin]]
name = "foo"
- [dependencies]
- bar = "0.5.0"
+ [dependencies.bar]
+ path = "bar"
"#)
.file("src/foo.rs", r#"
extern crate bar;
use std::io::{fs, File, UserRWX};
-use support::{ResultTest, project, execs, main_file, cargo_dir, path2url};
+use support::{ResultTest, project, execs, main_file, cargo_dir};
use support::{COMPILING, RUNNING};
use support::paths::{mod, PathExt};
use hamcrest::{assert_that, existing_file};
test!(no_rebuild_dependency {
let mut p = project("foo");
- let bar = p.root().join("bar");
p = p
- .file(".cargo/config", format!(r#"
- paths = ['{}']
- "#, bar.display()).as_slice())
.file("Cargo.toml", r#"
[project]
authors = ["wycats@example.com"]
[[bin]] name = "foo"
- [dependencies] bar = "0.5.0"
+ [dependencies.bar] path = "bar"
"#)
.file("src/foo.rs", r#"
extern crate bar;
.file("bar/src/bar.rs", r#"
pub fn bar() {}
"#);
- let bar = path2url(bar);
// First time around we should compile both foo and bar
assert_that(p.cargo_process("build"),
execs().with_stdout(format!("{} bar v0.5.0 ({})\n\
{} foo v0.5.0 ({})\n",
- COMPILING, bar,
+ COMPILING, p.url(),
COMPILING, p.url())));
// This time we shouldn't compile bar
assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
execs().with_stdout(format!("{} bar v0.5.0 ({})\n\
{} foo v0.5.0 ({})\n",
- COMPILING, bar,
+ COMPILING, p.url(),
COMPILING, p.url())));
})
test!(deep_dependencies_trigger_rebuild {
let mut p = project("foo");
- let bar = p.root().join("bar");
- let baz = p.root().join("baz");
p = p
- .file(".cargo/config", format!(r#"
- paths = ['{}', '{}']
- "#, bar.display(), baz.display()).as_slice())
.file("Cargo.toml", r#"
[project]
[[bin]]
name = "foo"
- [dependencies]
- bar = "0.5.0"
+ [dependencies.bar]
+ path = "bar"
"#)
.file("src/foo.rs", r#"
extern crate bar;
[lib]
name = "bar"
- [dependencies]
- baz = "0.5.0"
+ [dependencies.baz]
+ path = "../baz"
"#)
.file("bar/src/bar.rs", r#"
extern crate baz;
.file("baz/src/baz.rs", r#"
pub fn baz() {}
"#);
- let baz = path2url(baz);
- let bar = path2url(bar);
assert_that(p.cargo_process("build"),
execs().with_stdout(format!("{} baz v0.5.0 ({})\n\
{} bar v0.5.0 ({})\n\
{} foo v0.5.0 ({})\n",
- COMPILING, baz,
- COMPILING, bar,
+ COMPILING, p.url(),
+ COMPILING, p.url(),
COMPILING, p.url())));
assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
execs().with_stdout(""));
execs().with_stdout(format!("{} baz v0.5.0 ({})\n\
{} bar v0.5.0 ({})\n\
{} foo v0.5.0 ({})\n",
- COMPILING, baz,
- COMPILING, bar,
+ COMPILING, p.url(),
+ COMPILING, p.url(),
COMPILING, p.url())));
// Make sure an update to bar doesn't trigger baz
assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
execs().with_stdout(format!("{} bar v0.5.0 ({})\n\
{} foo v0.5.0 ({})\n",
- COMPILING, bar,
+ COMPILING, p.url(),
COMPILING, p.url())));
})
test!(no_rebuild_two_deps {
let mut p = project("foo");
- let bar = p.root().join("bar");
- let baz = p.root().join("baz");
p = p
- .file(".cargo/config", format!(r#"
- paths = ['{}', '{}']
- "#, bar.display(), baz.display()).as_slice())
.file("Cargo.toml", r#"
[project]
[[bin]]
name = "foo"
- [dependencies]
- bar = "0.5.0"
- baz = "0.5.0"
+ [dependencies.bar]
+ path = "bar"
+ [dependencies.baz]
+ path = "baz"
"#)
.file("src/foo.rs", r#"
extern crate bar;
[lib]
name = "bar"
- [dependencies]
- baz = "0.5.0"
+ [dependencies.baz]
+ path = "../baz"
"#)
.file("bar/src/bar.rs", r#"
pub fn bar() {}
.file("baz/src/baz.rs", r#"
pub fn baz() {}
"#);
- let baz = path2url(baz);
- let bar = path2url(bar);
assert_that(p.cargo_process("build"),
execs().with_stdout(format!("{} baz v0.5.0 ({})\n\
{} bar v0.5.0 ({})\n\
{} foo v0.5.0 ({})\n",
- COMPILING, baz,
- COMPILING, bar,
+ COMPILING, p.url(),
+ COMPILING, p.url(),
COMPILING, p.url())));
assert_that(&p.bin("foo"), existing_file());
assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
authors = []
version = "0.0.1"
"#)
- .file("src/main.rs", "fn main() {}");
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", r#"
+ [package]
+ name = "bar"
+ authors = []
+ version = "0.0.1"
+ "#)
+ .file("bar/src/lib.rs", "");
assert_that(p.cargo_process("generate-lockfile"),
execs().with_status(0));
authors = []
version = "0.0.1"
- [dependencies]
- bar = "0.5.0"
+ [dependencies.bar]
+ path = "bar"
"#).assert();
assert_that(p.process(cargo_dir().join("cargo")).arg("generate-lockfile"),
execs().with_status(0));
assert!(lock1 != lock2);
// change the dep
- File::create(&toml).write_str(r#"
+ File::create(&p.root().join("bar/Cargo.toml")).write_str(r#"
[package]
- name = "foo"
+ name = "bar"
authors = []
- version = "0.0.1"
-
- [dependencies]
- bar = "0.2.0"
+ version = "0.0.2"
"#).assert();
assert_that(p.process(cargo_dir().join("cargo")).arg("generate-lockfile"),
execs().with_status(0));